* @file
* @defgroup JobQueue JobQueue
*/
-use MediaWiki\MediaWikiServices;
+use Liuggio\StatsdClient\Factory\StatsdDataFactoryInterface;
/**
* Class to handle enqueueing and running of background jobs
protected $maxTries;
/** @var string|bool Read-only rationale (or false if read/write) */
protected $readOnlyReason;
+ /** @var StatsdDataFactoryInterface */
+ protected $stats;
/** @var BagOStuff */
protected $dupCache;
if ( !in_array( $this->order, $this->supportedOrders() ) ) {
throw new JobQueueError( __CLASS__ . " does not support '{$this->order}' order." );
}
- $this->dupCache = wfGetCache( CACHE_ANYTHING );
$this->readOnlyReason = $params['readOnlyReason'] ?? false;
+ $this->stats = $params['stats'] ?? new NullStatsdDataFactory();
+ $this->dupCache = $params['stash'] ?? new EmptyBagOStuff();
}
/**
* of jobs simply means re-inserting them into the queue. Jobs can be
* attempted up to three times before being discarded.
* - readOnlyReason : Set this to a string to make the queue read-only.
+ * - stash : A BagOStuff instance that can be used for root job deduplication.
+ * - stats : A StatsdDataFactoryInterface instance for tracking metrics
+ *     [optional]; see the example below.
*
* Queue classes should throw an exception if they do not support the options given.
*
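+ *
+ * @par Example of constructing a queue with these dependencies injected
+ * (a sketch; the 'class' and 'type' values here are placeholders):
+ * @code
+ * $queue = JobQueue::factory( [
+ *     'class' => JobQueueDB::class,
+ *     'domain' => WikiMap::getCurrentWikiDbDomain()->getId(),
+ *     'type' => 'refreshLinks',
+ *     'stash' => MediaWikiServices::getInstance()->getMainObjectStash(),
+ *     'stats' => MediaWikiServices::getInstance()->getStatsdDataFactory()
+ * ] );
+ * @endcode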
}
/**
- * @return string Wiki ID
+ * @return string Database domain ID
*/
final public function getDomain() {
return $this->domain;
* @return Job|bool Returns false if there are no jobs
*/
final public function pop() {
- global $wgJobClasses;
-
$this->assertNotReadOnly();
- if ( !WikiMap::isCurrentWikiDbDomain( $this->domain ) ) {
- throw new JobQueueError(
- "Cannot pop '{$this->type}' job off foreign '{$this->domain}' wiki queue." );
- } elseif ( !isset( $wgJobClasses[$this->type] ) ) {
- // Do not pop jobs if there is no class for the queue type
- throw new JobQueueError( "Unrecognized job type '{$this->type}'." );
- }
$job = $this->doPop();
// Flag this job as an old duplicate based on its "root" job...
try {
if ( $job && $this->isRootJobOldDuplicate( $job ) ) {
- self::incrStats( 'dupe_pops', $this->type );
+ $this->incrStats( 'dupe_pops', $this->type );
$job = DuplicateJob::newFromJob( $job ); // convert to a no-op
}
} catch ( Exception $e ) {
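+ // don't let a failed duplicate check lose the popped job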
}
// Update the timestamp of the last root job started at the location...
- return $this->dupCache->set( $key, $params['rootJobTimestamp'], JobQueueDB::ROOTJOB_TTL );
+ return $this->dupCache->set( $key, $params['rootJobTimestamp'], self::ROOTJOB_TTL );
}
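+ // Typical caller-side use of root job deduplication (a sketch; the job
+ // type and signature key below are illustrative):
+ //
+ //   $params = Job::newRootJobParams( "purge:{$title->getPrefixedDBkey()}" );
+ //   $queue->deduplicateRootJob( new JobSpecification( 'htmlCacheUpdate', $params ) );
+ //
+ // pop() then compares each job's rootJobTimestamp to the stored value and
+ // turns stale duplicates into no-op DuplicateJob instances.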
/**
+ * @param string $key Event type
+ * @param string $type Job type
* @param int $delta
* @since 1.22
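+ *
+ * @par Example (metric names follow from the implementation below;
+ * 'refreshLinks' is a placeholder job type):
+ * @code
+ * $this->incrStats( 'pops', 'refreshLinks' );
+ * // increments "jobqueue.pops.all" and "jobqueue.pops.refreshLinks" by 1
+ * @endcode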
*/
- public static function incrStats( $key, $type, $delta = 1 ) {
- static $stats;
- if ( !$stats ) {
- $stats = MediaWikiServices::getInstance()->getStatsdDataFactory();
- }
- $stats->updateCount( "jobqueue.{$key}.all", $delta );
- $stats->updateCount( "jobqueue.{$key}.{$type}", $delta );
+ protected function incrStats( $key, $type, $delta = 1 ) {
+ $this->stats->updateCount( "jobqueue.{$key}.all", $delta );
+ $this->stats->updateCount( "jobqueue.{$key}.{$type}", $delta );
}
}
* If not specified, the primary DB cluster for the wiki will be used.
* This can be overridden with a custom cluster so that DB handles will
* be retrieved via LBFactory::getExternalLB() and getConnection().
+ * - wanCache : An instance of WANObjectCache to use for caching.
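+ *
+ * @par Example of supplying the cache dependency (a sketch; 'extension1'
+ * is a placeholder external cluster name):
+ * @code
+ * $conf['cluster'] = 'extension1';
+ * $conf['wanCache'] = MediaWikiServices::getInstance()->getMainWANObjectCache();
+ * $queue = JobQueue::factory( $conf );
+ * @endcode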
* @param array $params
*/
protected function __construct( array $params ) {
$this->cluster = $params['cluster'];
}
- $this->cache = MediaWikiServices::getInstance()->getMainWANObjectCache();
+ $this->cache = $params['wanCache'] ?? WANObjectCache::newEmpty();
}
protected function supportedOrders() {
foreach ( array_chunk( $rows, 50 ) as $rowBatch ) {
$dbw->insert( 'job', $rowBatch, $method );
}
- JobQueue::incrStats( 'inserts', $this->type, count( $rows ) );
- JobQueue::incrStats( 'dupe_inserts', $this->type,
+ $this->incrStats( 'inserts', $this->type, count( $rows ) );
+ $this->incrStats( 'dupe_inserts', $this->type,
count( $rowSet ) + count( $rowList ) - count( $rows )
);
} catch ( DBError $e ) {
if ( !$row ) {
break; // nothing to do
}
- JobQueue::incrStats( 'pops', $this->type );
+ $this->incrStats( 'pops', $this->type );
// Get the job object from the row...
$title = Title::makeTitle( $row->job_namespace, $row->job_title );
$job = Job::factory( $row->job_cmd, $title,
__METHOD__
);
- JobQueue::incrStats( 'acks', $this->type );
+ $this->incrStats( 'acks', $this->type );
} catch ( DBError $e ) {
$this->throwDBException( $e );
}
);
$affected = $dbw->affectedRows();
$count += $affected;
- JobQueue::incrStats( 'recycles', $this->type, $affected );
+ $this->incrStats( 'recycles', $this->type, $affected );
}
}
$dbw->delete( 'job', [ 'job_id' => $ids ], __METHOD__ );
$affected = $dbw->affectedRows();
$count += $affected;
- JobQueue::incrStats( 'abandons', $this->type, $affected );
+ $this->incrStats( 'abandons', $this->type, $affected );
}
$dbw->unlock( "jobqueue-recycle-{$this->type}", __METHOD__ );
$conf['readOnlyReason'] = $this->readOnlyReason;
}
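+ // Inject process-wide services from here so the JobQueue classes
+ // themselves no longer reach into MediaWikiServices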
+ $services = MediaWikiServices::getInstance();
+ $conf['stats'] = $services->getStatsdDataFactory();
+ $conf['wanCache'] = $services->getMainWANObjectCache();
+ $conf['stash'] = $services->getMainObjectStash();
+
return JobQueue::factory( $conf );
}
* @return Job|bool Returns false on failure
*/
public function pop( $qtype = self::TYPE_DEFAULT, $flags = 0, array $blacklist = [] ) {
+ global $wgJobClasses;
+
$job = false;
+ if ( !WikiMap::isCurrentWikiDbDomain( $this->domain ) ) {
+ throw new JobQueueError(
+ "Cannot pop '{$qtype}' job off foreign '{$this->domain}' wiki queue." );
+ } elseif ( is_string( $qtype ) && !isset( $wgJobClasses[$qtype] ) ) {
+ // Do not pop jobs if there is no class for the queue type
+ throw new JobQueueError( "Unrecognized job type '$qtype'." );
+ }
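+ // e.g. pop( 'refreshLinks' ) fails fast here if 'refreshLinks' is not
+ // registered in $wgJobClasses, instead of popping a job that cannot
+ // be constructed later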
+
if ( is_string( $qtype ) ) { // specific job type
if ( !in_array( $qtype, $blacklist ) ) {
$job = $this->get( $qtype )->pop();
/** @var array[] */
protected static $data = [];
+ public function __construct( array $params ) {
+ parent::__construct( $params );
+
+ $this->dupCache = new HashBagOStuff();
+ }
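+ // A sketch of creating an in-memory queue for tests (the domain and
+ // type values are placeholders); pushed jobs exist only in the current
+ // PHP process:
+ //
+ //   $queue = JobQueue::factory( [
+ //       'class' => JobQueueMemory::class,
+ //       'domain' => 'testdomain',
+ //       'type' => 'null'
+ //   ] );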
+
/**
* @see JobQueue::doBatchPush
*
foreach ( $jobs as $job ) {
if ( $job->ignoreDuplicates() ) {
- $sha1 = Wikimedia\base_convert(
- sha1( serialize( $job->getDeduplicationInfo() ) ),
- 16, 36, 31
- );
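+ // Hash the job's deduplication info (type, namespace, title, and params
+ // minus root-job/delay fields) so identical jobs collapse to one key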
+ $sha1 = sha1( serialize( $job->getDeduplicationInfo() ) );
if ( !isset( $unclaimed[$sha1] ) ) {
$unclaimed[$sha1] = $job;
}
$failed += count( $itemBatch );
}
}
- JobQueue::incrStats( 'inserts', $this->type, count( $items ) );
- JobQueue::incrStats( 'inserts_actual', $this->type, $pushed );
- JobQueue::incrStats( 'dupe_inserts', $this->type,
+ $this->incrStats( 'inserts', $this->type, count( $items ) );
+ $this->incrStats( 'inserts_actual', $this->type, $pushed );
+ $this->incrStats( 'dupe_inserts', $this->type,
count( $items ) - $failed - $pushed );
if ( $failed > 0 ) {
$err = "Could not insert {$failed} {$this->type} job(s).";
break; // no jobs; nothing to do
}
- JobQueue::incrStats( 'pops', $this->type );
+ $this->incrStats( 'pops', $this->type );
$item = $this->unserialize( $blob );
if ( $item === false ) {
wfDebugLog( 'JobQueueRedis', "Could not unserialize {$this->type} job." );
return false;
}
- JobQueue::incrStats( 'acks', $this->type );
+ $this->incrStats( 'acks', $this->type );
} catch ( RedisException $e ) {
$this->throwRedisException( $conn, $e );
}
}
$baseConfig['type'] = 'null';
$baseConfig['domain'] = WikiMap::getCurrentWikiDbDomain()->getId();
+ $baseConfig['stash'] = new HashBagOStuff();
+ $baseConfig['wanCache'] = new WANObjectCache( [ 'cache' => new HashBagOStuff() ] );
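+ // use in-process stubs so unit tests never touch live cache servers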
$variants = [
'queueRand' => [ 'order' => 'random', 'claimTTL' => 0 ],
'queueRandTTL' => [ 'order' => 'random', 'claimTTL' => 10 ],